Show the code
targets::tar_visnetwork(targets_only = TRUE)

Predictive Models for BoardGameGeek Ratings

targets::tar_visnetwork(targets_only = TRUE)

# Versioned pins board holding the fitted vetiver models.
# TRUE (not T) and <- (not =) per tidyverse style: T is reassignable.
model_board <- pins::board_folder(
  "models",
  versioned = TRUE
)
# Read the fitted vetiver models back from the versioned pins board:
# game complexity (averageweight), average rating, and number of user
# ratings. Use <- for assignment per tidyverse style.
averageweight_fit <-
  vetiver_pin_read(
    model_board,
    "bgg_averageweight"
  )

average_fit <-
  vetiver_pin_read(
    model_board,
    "bgg_average"
  )

usersrated_fit <-
  vetiver_pin_read(
    model_board,
    "bgg_usersrated"
  )

# Plot validation-set predictions against observed outcomes.
valid_predictions |>
pivot_outcomes() |>
left_join(
games |>
bggUtils:::unnest_outcomes() |>
select(game_id, usersrated),
by = join_by(game_id)
) |>
plot_predictions(alpha = usersrated)+
theme(legend.title = element_text())targets_tracking_details(metrics = valid_metrics,
details = details) |>
select(model, minratings, outcome, any_of(c("rmse", "mae", "mape", "rsq", "ccc"))) |>
filter(minratings == 25) |>
select(minratings, everything()) |>
gt::gt() |>
gt::tab_options(quarto.disable_processing = T) |>
gtExtras::gt_theme_espn()| minratings | model | outcome | rmse | mae | mape | rsq | ccc |
|---|---|---|---|---|---|---|---|
| 25 | glmnet | average | 0.685 | 0.504 | 7.598 | 0.281 | 0.458 |
| 25 | lightgbm | averageweight | 0.445 | 0.342 | 18.849 | 0.681 | 0.815 |
| 25 | glmnet+glmnet | bayesaverage | 0.298 | 0.173 | 2.861 | 0.415 | 0.633 |
| 25 | glmnet | usersrated | 1902.399 | 469.158 | 164.699 | 0.145 | 0.378 |
# Score upcoming games: first impute the (unobserved) complexity with the
# averageweight model, then predict the Geek rating (bayesaverage) using
# the average-rating and users-rated models. <- per tidyverse style.
predictions <-
  upcoming_games |>
  impute_averageweight(
    model = averageweight_fit
  ) |>
  predict_bayesaverage(
    average_model = average_fit,
    usersrated_model = usersrated_fit
  )
# Display predictions for recent releases as an interactive table.
predictions |>
  filter(yearpublished >= 2024) |>
  # Exclude one known problem title (game_id 388225) from the display.
  filter(game_id != 388225) |>
  predictions_dt(games = games) |>
  add_colors()